from nsepy import get_history
from datetime import date
import pandas as pd
import numpy as np
import seaborn as sns
# BUG FIX: sns.set() returns None; the original `sns=sns.set()` rebound the
# module name to None, breaking any later sns.* call in this session.
sns.set()
import matplotlib.pyplot as plt
%matplotlib inline
import warnings
warnings.filterwarnings('ignore')
#import pandas_datareader as pdr/
# Interactively download price history from NSE.
# Index symbols (e.g. NIFTY, BANKNIFTY) require index=True; stocks do not.
# NOTE(review): index mode starts in 2017 while stock mode starts in 2020 —
# confirm this asymmetry is intentional.
stock_type = input('want to check index?: ').strip().upper()
if stock_type == 'YES':
    df = get_history(symbol=input('Enter stock name: '),
                     start=date(2017, 1, 1),
                     end=date.today(),
                     index=True)
elif stock_type == 'NO':
    df = get_history(symbol=input('Enter stock name: '),
                     start=date(2020, 1, 1),
                     end=date.today())
else:
    # Fail fast: the rest of the script needs `df`; merely printing (as the
    # original did) would lead to a confusing NameError further down.
    raise SystemExit('Please enter valid stock symbol or INDEX ')
want to check index?: YES Enter stock name: BANKNIFTY
df.head()
| Open | High | Low | Close | Volume | Turnover | |
|---|---|---|---|---|---|---|
| Date | ||||||
| 2017-01-02 | 18242.30 | 18249.00 | 17844.90 | 17969.60 | 74444730.0 | 1.857240e+10 |
| 2017-01-03 | 18002.75 | 18115.05 | 17830.95 | 18035.60 | 64513818.0 | 1.769300e+10 |
| 2017-01-04 | 18037.45 | 18092.85 | 17868.90 | 17891.00 | 50508161.0 | 1.615850e+10 |
| 2017-01-05 | 18000.75 | 18164.05 | 17977.80 | 18115.95 | 68874940.0 | 2.271180e+10 |
| 2017-01-06 | 18168.45 | 18325.50 | 18157.30 | 18264.00 | 65919927.0 | 2.174590e+10 |
# Keep only the OHLCV + Turnover columns needed below.
df=df[['Open','High','Low','Close','Volume','Turnover']]
df.head()
| Open | High | Low | Close | Volume | Turnover | |
|---|---|---|---|---|---|---|
| Date | ||||||
| 2020-01-01 | 334.70 | 335.95 | 332.15 | 334.45 | 17379320 | 5.805130e+14 |
| 2020-01-02 | 334.50 | 339.85 | 333.35 | 339.30 | 20324236 | 6.845341e+14 |
| 2020-01-03 | 337.95 | 337.95 | 332.00 | 333.70 | 21853208 | 7.322600e+14 |
| 2020-01-06 | 331.70 | 331.70 | 317.70 | 319.00 | 35645325 | 1.147882e+15 |
| 2020-01-07 | 324.45 | 327.00 | 315.40 | 318.40 | 50966826 | 1.632868e+15 |
#shape of data
df.shape
(1423, 6)
df.info()
<class 'pandas.core.frame.DataFrame'> Index: 1423 entries, 2017-01-02 to 2022-09-28 Data columns (total 6 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Open 1423 non-null float64 1 High 1423 non-null float64 2 Low 1423 non-null float64 3 Close 1423 non-null float64 4 Volume 1412 non-null float64 5 Turnover 1412 non-null float64 dtypes: float64(6) memory usage: 77.8+ KB
df.isnull().sum()
Open 0 High 0 Low 0 Close 0 Volume 11 Turnover 11 dtype: int64
df['Turnover']
Date
2017-01-02 1.857240e+10
2017-01-03 1.769300e+10
2017-01-04 1.615850e+10
2017-01-05 2.271180e+10
2017-01-06 2.174590e+10
...
2022-09-22 5.555900e+10
2022-09-23 6.487980e+10
2022-09-26 6.410060e+10
2022-09-27 5.503350e+10
2022-09-28 5.500600e+10
Name: Turnover, Length: 1423, dtype: float64
# Turnover is not used by the models below and shares the same 11 missing
# rows as Volume (see the isnull() check above), so drop the column and then
# the remaining NaN rows.
df.drop(['Turnover'], axis=1,inplace=True)
df.dropna(inplace=True)
# Quick visual sanity check of the Open series.
df['Open'].plot(figsize=(16,6))
plt.title(' OPEN VALUE OVER PERIOD OF TIME',fontsize=15)
plt.xlabel('YEAR',fontsize=15)
plt.ylabel('NIFTY VALUE',fontsize=15)
Text(0, 0.5, 'NIFTY VALUE')
# Work on a copy with a proper DatetimeIndex so resample() is available.
df1=df.copy()
from datetime import datetime
df1.index = pd.to_datetime(df1.index)
# Annual ('A') minimum of every column.
df1.resample(rule='A').min()
| Open | High | Low | Close | Volume | |
|---|---|---|---|---|---|
| Date | |||||
| 2020-12-31 | 151.95 | 153.2 | 149.45 | 150.85 | 7753219 |
| 2021-12-31 | 271.90 | 280.0 | 269.50 | 275.65 | 8639835 |
| 2022-12-31 | 438.00 | 442.1 | 425.00 | 434.70 | 6945602 |
# Quarter-start ('QS') resample: minimum Open per quarter.
df1.resample(rule='QS').min()['Open'].plot(figsize=(16,6))
plt.title('INDIAN NIFTY MINIMUM VALUE PER QUARTER',fontsize=15)
plt.xlabel('YEAR',fontsize=15)
plt.ylabel('NIFTY VALUE',fontsize=15)
Text(0, 0.5, 'NIFTY VALUE')
df1_min=df1.resample(rule='QS').min()['Open']
df1_min.head()
Date 2020-01-01 178.00 2020-04-01 151.95 2020-07-01 179.00 2020-10-01 187.00 2021-01-01 271.90 Freq: QS-JAN, Name: Open, dtype: float64
df1_max=df1.resample(rule='QS').max()['Open']
df1_max.head()
Date 2020-01-01 337.95 2020-04-01 197.00 2020-07-01 231.55 2020-10-01 278.00 2021-01-01 425.05 Freq: QS-JAN, Name: Open, dtype: float64
# Merge on the quarterly index; identical column names are suffixed by
# pandas as Open_x (the min series) and Open_y (the max series).
df_min_max=pd.merge(df1_min, df1_max, left_index=True, right_index=True)
df_min_max.head()
| Open_x | Open_y | |
|---|---|---|
| Date | ||
| 2020-01-01 | 178.00 | 337.95 |
| 2020-04-01 | 151.95 | 197.00 |
| 2020-07-01 | 179.00 | 231.55 |
| 2020-10-01 | 187.00 | 278.00 |
| 2021-01-01 | 271.90 | 425.05 |
df_min_max['Open_x'].plot(kind='bar',figsize=(16,6))
<AxesSubplot:xlabel='Date'>
# One subplot per column for a quick overview of all series at once.
df1.plot(subplots = True, figsize = (12, 12));
plt.legend(loc = 'best')
plt.show()
We can see a deep dip in the market in 2020 due to COVID, but after a few months the market recovered quickly and even made an all-time high.
We can see an increase in volumes after COVID in 2020. (Volume is the amount of an asset or security that changes hands over some period of time, often over the course of a day. For instance, stock trading volume would refer to the number of shares of a security traded between its daily open and close.)
df1.head()
| Open | High | Low | Close | Volume | |
|---|---|---|---|---|---|
| Date | |||||
| 2020-01-01 | 334.70 | 335.95 | 332.15 | 334.45 | 17379320 |
| 2020-01-02 | 334.50 | 339.85 | 333.35 | 339.30 | 20324236 |
| 2020-01-03 | 337.95 | 337.95 | 332.00 | 333.70 | 21853208 |
| 2020-01-06 | 331.70 | 331.70 | 317.70 | 319.00 | 35645325 |
| 2020-01-07 | 324.45 | 327.00 | 315.40 | 318.40 | 50966826 |
import mplfinance as mpf
Nifty 50 behavior during each time unit is represented in the form of a candle. If the closing price of a stock is higher than open price during a particular time period, then the candle is green, if the close price is below the open price then the candle is red. Each candle has a body and two wicks. The distance between open to close is represented by the body of a candle and the upper and lower wicks represent the highs and lows of a candle.
mpf.plot(data=df1,type='candle',mav=(300),volume=True,tight_layout=True,figsize=(20,12),style='yahoo')
from prophet import Prophet
import warnings
warnings.filterwarnings('ignore') # Hide warnings
import datetime as dt
import pandas as pd
pd.core.common.is_list_like = pd.api.types.is_list_like
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import matplotlib.dates as mdates
from nsepy import get_history
from datetime import date
# Prophet needs plain columns (not a DatetimeIndex), so move Date out of the
# index, then keep only the two columns Prophet consumes.
df.reset_index(inplace=True)
data=df[["Date","Close"]]
df
| Date | Open | High | Low | Close | Volume | |
|---|---|---|---|---|---|---|
| 0 | 2020-01-01 | 334.70 | 335.95 | 332.15 | 334.45 | 17379320 |
| 1 | 2020-01-02 | 334.50 | 339.85 | 333.35 | 339.30 | 20324236 |
| 2 | 2020-01-03 | 337.95 | 337.95 | 332.00 | 333.70 | 21853208 |
| 3 | 2020-01-06 | 331.70 | 331.70 | 317.70 | 319.00 | 35645325 |
| 4 | 2020-01-07 | 324.45 | 327.00 | 315.40 | 318.40 | 50966826 |
| ... | ... | ... | ... | ... | ... | ... |
| 679 | 2022-09-22 | 566.75 | 574.00 | 562.00 | 567.30 | 11869435 |
| 680 | 2022-09-23 | 564.80 | 568.00 | 549.50 | 550.60 | 12563239 |
| 681 | 2022-09-26 | 545.80 | 549.00 | 533.00 | 543.30 | 16158750 |
| 682 | 2022-09-27 | 544.55 | 550.90 | 533.20 | 536.50 | 11066799 |
| 683 | 2022-09-28 | 532.90 | 539.80 | 520.35 | 524.85 | 12233485 |
684 rows × 6 columns
# Prophet requires the columns to be named 'ds' (datestamp) and 'y' (target).
data=data.rename(columns={"Date": "ds", "Close": "y"})
len(data)
684
# Index of the 80/20 chronological split point.
a = int(len(data) * 80 / 100)
print(a)
547
# Chronological 80/20 split — no shuffling for time-series data.
df_train=data[0:a]
df_test=data[a:]
Fitting the model
# Define the model (Prophet with default settings).
model = Prophet()
# Fit on the training window only; the test window stays held out.
model.fit(df_train)
00:11:53 - cmdstanpy - INFO - Chain [1] start processing 00:11:54 - cmdstanpy - INFO - Chain [1] done processing
<prophet.forecaster.Prophet at 0x231f573c220>
# Predict on the held-out dates; yhat is the point forecast,
# yhat_lower / yhat_upper the uncertainty interval bounds.
forecast = model.predict(df_test)
forecast[['ds', 'yhat', 'yhat_lower', 'yhat_upper']]
| ds | yhat | yhat_lower | yhat_upper | |
|---|---|---|---|---|
| 0 | 2022-03-11 | 461.545223 | 441.402895 | 481.943996 |
| 1 | 2022-03-14 | 451.290021 | 432.170012 | 470.481047 |
| 2 | 2022-03-15 | 450.276928 | 430.828872 | 468.517713 |
| 3 | 2022-03-16 | 449.049043 | 429.430760 | 469.675613 |
| 4 | 2022-03-17 | 446.347518 | 426.434817 | 465.800359 |
| ... | ... | ... | ... | ... |
| 132 | 2022-09-22 | 473.517644 | 406.161675 | 551.080201 |
| 133 | 2022-09-23 | 471.562586 | 401.366896 | 546.621567 |
| 134 | 2022-09-26 | 470.758254 | 402.348109 | 547.737094 |
| 135 | 2022-09-27 | 472.714958 | 401.293175 | 550.218102 |
| 136 | 2022-09-28 | 474.389872 | 404.252811 | 553.267494 |
137 rows × 4 columns
fig1 = model.plot(forecast)
# Compare point forecasts against the actual test closes.
y_pred = forecast['yhat'].values
y_true = df_test['y'].values
from sklearn.metrics import mean_squared_error, mean_absolute_error
from sklearn.metrics import mean_squared_error, r2_score
import math
# A negative R2 means the forecast is worse than predicting the test mean.
print('MSE: %.3f' % mean_squared_error(y_true, y_pred))
print('R2: %.3f' % r2_score(y_true, y_pred))
MSE: 4339.327 R2: -2.847
# Report hold-out performance of the Prophet model.
mse = mean_squared_error(y_true, y_pred)
mae = mean_absolute_error(y_true, y_pred)
rmse = math.sqrt(mse)
# MAPE as a fraction (0.116 ≈ 11.6% average relative error).
mape = np.mean(np.abs(y_pred - y_true) / np.abs(y_true))
print('MSE: '+str(mse))
print('MAE: '+str(mae))
print('RMSE: '+str(rmse))
print('MAPE: '+str(mape))
MSE: 4339.326770079902 MAE: 59.16928749357548 RMSE: 65.87356655047533 MAPE: 0.11616185625604351
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow.keras.layers import Dense, LSTM
from tensorflow.keras.models import Sequential
from sklearn.preprocessing import MinMaxScaler
pd.options.mode.chained_assignment = None
import yfinance as yf
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
from nsepy import get_history
from datetime import date
from plotly.offline import init_notebook_mode, iplot, plot
import plotly as py
import plotly.express as px
from plotly.subplots import make_subplots
from sklearn.metrics import roc_curve, auc
import plotly.graph_objs as go
import plotly.figure_factory as ff
init_notebook_mode()
import warnings
warnings.filterwarnings('ignore') # Hide warnings
import datetime as dt
import pandas as pd
pd.core.common.is_list_like = pd.api.types.is_list_like
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import matplotlib.dates as mdates
# Same 80/20 split point as before, now computed on the full OHLCV frame
# (same length as `data`, so a == 547 again).
a=len(df)*80/100
a=int(a)
print(a)
547
# Chronological split of the full frame for the LSTM experiment.
train=df[0:a]
test=df[a:]
len(test)
137
len(train)#.tail()
547
train.tail()
| Date | Open | High | Low | Close | Volume | |
|---|---|---|---|---|---|---|
| 542 | 2022-03-04 | 460.3 | 470.90 | 457.65 | 461.95 | 20684268 |
| 543 | 2022-03-07 | 447.5 | 453.95 | 433.45 | 440.30 | 33720472 |
| 544 | 2022-03-08 | 438.0 | 442.10 | 425.00 | 440.30 | 42510207 |
| 545 | 2022-03-09 | 442.0 | 454.30 | 438.55 | 451.70 | 32331325 |
| 546 | 2022-03-10 | 469.9 | 476.65 | 461.65 | 468.70 | 34553299 |
test.head()
| Date | Open | High | Low | Close | Volume | |
|---|---|---|---|---|---|---|
| 547 | 2022-03-11 | 466.4 | 474.8 | 462.00 | 470.35 | 22091309 |
| 548 | 2022-03-14 | 471.0 | 486.0 | 470.55 | 485.15 | 27750689 |
| 549 | 2022-03-15 | 489.0 | 490.5 | 480.65 | 485.85 | 27204899 |
| 550 | 2022-03-16 | 493.0 | 494.3 | 489.40 | 492.75 | 16211288 |
| 551 | 2022-03-17 | 499.0 | 503.5 | 498.30 | 501.90 | 22947673 |
# Column 1 is 'Open' (column 0 is Date after the earlier reset_index) —
# the LSTM is trained on Open prices only.
training_set = train.iloc[:, 1:2].values
len(training_set)
547
from sklearn.preprocessing import MinMaxScaler
# Scale Open prices into [0, 1] for the LSTM.
sc = MinMaxScaler(feature_range=(0, 1))
training_set_scaled = sc.fit_transform(training_set)
# Sliding windows: each sample is the previous 60 scaled prices and the
# target is the price that follows the window.
lookback = 60
X_train = np.array([training_set_scaled[t - lookback:t, 0]
                    for t in range(lookback, len(training_set))])
y_train = np.array([training_set_scaled[t, 0]
                    for t in range(lookback, len(training_set))])
# Keras LSTMs expect (samples, timesteps, features).
X_train = np.reshape(X_train, (X_train.shape[0], X_train.shape[1], 1))
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import Dropout
# Stacked LSTM: four 50-unit layers with 20% dropout between them.
regressor = Sequential()
# return_sequences=True so the next LSTM layer receives the full sequence.
regressor.add(LSTM(units = 50, return_sequences = True, input_shape = (X_train.shape[1], 1)))
regressor.add(Dropout(0.2))
regressor.add(LSTM(units = 50, return_sequences = True))
regressor.add(Dropout(0.2))
regressor.add(LSTM(units = 50, return_sequences = True))
regressor.add(Dropout(0.2))
# Final LSTM returns only its last hidden state.
regressor.add(LSTM(units = 50))
regressor.add(Dropout(0.2))
# Single-unit head: regression on the next scaled price.
regressor.add(Dense(units = 1))
regressor.compile(optimizer = 'adam', loss = 'mean_squared_error')
regressor.fit(X_train, y_train, epochs = 100, batch_size = 32)
Epoch 1/100 16/16 [==============================] - 7s 70ms/step - loss: 0.0873 Epoch 2/100 16/16 [==============================] - 1s 70ms/step - loss: 0.0171 Epoch 3/100 16/16 [==============================] - 1s 73ms/step - loss: 0.0116 Epoch 4/100 16/16 [==============================] - 1s 71ms/step - loss: 0.0095 Epoch 5/100 16/16 [==============================] - 1s 65ms/step - loss: 0.0083 Epoch 6/100 16/16 [==============================] - 1s 72ms/step - loss: 0.0080 Epoch 7/100 16/16 [==============================] - 1s 66ms/step - loss: 0.0073 Epoch 8/100 16/16 [==============================] - 1s 66ms/step - loss: 0.0081 Epoch 9/100 16/16 [==============================] - 1s 66ms/step - loss: 0.0073 Epoch 10/100 16/16 [==============================] - 1s 67ms/step - loss: 0.0073 Epoch 11/100 16/16 [==============================] - 1s 68ms/step - loss: 0.0068 Epoch 12/100 16/16 [==============================] - 1s 67ms/step - loss: 0.0072 Epoch 13/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0060 Epoch 14/100 16/16 [==============================] - 1s 80ms/step - loss: 0.0065 Epoch 15/100 16/16 [==============================] - 1s 80ms/step - loss: 0.0068 Epoch 16/100 16/16 [==============================] - 1s 80ms/step - loss: 0.0064 Epoch 17/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0055 Epoch 18/100 16/16 [==============================] - 1s 80ms/step - loss: 0.0071 Epoch 19/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0060 Epoch 20/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0045 Epoch 21/100 16/16 [==============================] - 1s 80ms/step - loss: 0.0052 Epoch 22/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0058 Epoch 23/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0063 Epoch 24/100 16/16 [==============================] - 1s 82ms/step - loss: 0.0050 Epoch 25/100 16/16 
[==============================] - 1s 78ms/step - loss: 0.0056 Epoch 26/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0053 Epoch 27/100 16/16 [==============================] - 1s 80ms/step - loss: 0.0057 Epoch 28/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0051 Epoch 29/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0061 Epoch 30/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0050 Epoch 31/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0045 Epoch 32/100 16/16 [==============================] - 1s 80ms/step - loss: 0.0046 Epoch 33/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0050 Epoch 34/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0050 Epoch 35/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0048 Epoch 36/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0052 Epoch 37/100 16/16 [==============================] - 1s 80ms/step - loss: 0.0045 Epoch 38/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0040 Epoch 39/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0048 Epoch 40/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0047 Epoch 41/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0043 Epoch 42/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0042 Epoch 43/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0050 Epoch 44/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0042 Epoch 45/100 16/16 [==============================] - 1s 84ms/step - loss: 0.0049 Epoch 46/100 16/16 [==============================] - 1s 85ms/step - loss: 0.0045 Epoch 47/100 16/16 [==============================] - 1s 84ms/step - loss: 0.0049 Epoch 48/100 16/16 [==============================] - 1s 83ms/step - loss: 0.0046 Epoch 49/100 16/16 
[==============================] - 1s 78ms/step - loss: 0.0046 Epoch 50/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0042 Epoch 51/100 16/16 [==============================] - 1s 81ms/step - loss: 0.0041 Epoch 52/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0044 Epoch 53/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0040 Epoch 54/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0040 Epoch 55/100 16/16 [==============================] - 1s 76ms/step - loss: 0.0041 Epoch 56/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0051 Epoch 57/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0044 Epoch 58/100 16/16 [==============================] - 1s 82ms/step - loss: 0.0043 Epoch 59/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0045 Epoch 60/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0035 Epoch 61/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0039 Epoch 62/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0036 Epoch 63/100 16/16 [==============================] - 1s 76ms/step - loss: 0.0035 Epoch 64/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0037 Epoch 65/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0037 Epoch 66/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0036 Epoch 67/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0033 Epoch 68/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0038 Epoch 69/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0034 Epoch 70/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0034 Epoch 71/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0035 Epoch 72/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0032 Epoch 73/100 16/16 
[==============================] - 1s 77ms/step - loss: 0.0037 Epoch 74/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0033 Epoch 75/100 16/16 [==============================] - 1s 78ms/step - loss: 0.0033 Epoch 76/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0037 Epoch 77/100 16/16 [==============================] - 1s 77ms/step - loss: 0.0034 Epoch 78/100 16/16 [==============================] - 1s 80ms/step - loss: 0.0031 Epoch 79/100 16/16 [==============================] - 1s 85ms/step - loss: 0.0034 Epoch 80/100 16/16 [==============================] - 1s 80ms/step - loss: 0.0033 Epoch 81/100 16/16 [==============================] - 1s 88ms/step - loss: 0.0032 Epoch 82/100 16/16 [==============================] - 1s 85ms/step - loss: 0.0033 Epoch 83/100 16/16 [==============================] - 1s 88ms/step - loss: 0.0036 Epoch 84/100 16/16 [==============================] - 1s 88ms/step - loss: 0.0034 Epoch 85/100 16/16 [==============================] - 1s 80ms/step - loss: 0.0030 Epoch 86/100 16/16 [==============================] - 1s 83ms/step - loss: 0.0035 Epoch 87/100 16/16 [==============================] - 1s 84ms/step - loss: 0.0034 Epoch 88/100 16/16 [==============================] - 1s 81ms/step - loss: 0.0030 Epoch 89/100 16/16 [==============================] - 1s 89ms/step - loss: 0.0030 Epoch 90/100 16/16 [==============================] - 1s 89ms/step - loss: 0.0034 Epoch 91/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0035 Epoch 92/100 16/16 [==============================] - 1s 79ms/step - loss: 0.0029 Epoch 93/100 16/16 [==============================] - 1s 82ms/step - loss: 0.0032 Epoch 94/100 16/16 [==============================] - 1s 82ms/step - loss: 0.0031 Epoch 95/100 16/16 [==============================] - 1s 82ms/step - loss: 0.0026 Epoch 96/100 16/16 [==============================] - 1s 88ms/step - loss: 0.0031 Epoch 97/100 16/16 
[==============================] - 1s 90ms/step - loss: 0.0028 Epoch 98/100 16/16 [==============================] - 1s 89ms/step - loss: 0.0028 Epoch 99/100 16/16 [==============================] - 1s 87ms/step - loss: 0.0033 Epoch 100/100 16/16 [==============================] - 1s 84ms/step - loss: 0.0031
<keras.callbacks.History at 0x23187a54880>
dataset_test=test
len(dataset_test)
137
real_stock_price = dataset_test.iloc[:, 1:2].values
real_stock_price.shape
(137, 1)
# Predict the hold-out window.
# BUG FIX: the original concatenated df['Open'] (which already contains the
# test rows) with dataset_test['Open'] again, duplicating the test period,
# so every 60-step lookback window was built from a shifted series.
# df already holds the whole history — take its last len(test)+60 values.
inputs = df['Open'].values[len(df) - len(dataset_test) - 60:]
inputs = inputs.reshape(-1,1)
inputs = sc.transform(inputs)
# One 60-step window per test observation.
X_test = []
for i in range(60, len(real_stock_price)+60):
    X_test.append(inputs[i-60:i, 0])
X_test = np.array(X_test)
X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))
predicted_stock_price = regressor.predict(X_test)
# Undo the MinMax scaling to get prices back on the original scale.
predicted_stock_price = sc.inverse_transform(predicted_stock_price)
5/5 [==============================] - 1s 23ms/step
predicted_stock_price.shape
(137, 1)
# NOTE(review): this asks the (Prophet) model for 48 *hourly* periods on a
# daily series, and `future` is never used below — looks like a leftover
# cell; candidate for removal.
future = model.make_future_dataframe(periods=48,freq = 'H')
future.tail()
| ds | |
|---|---|
| 590 | 2022-03-11 20:00:00 |
| 591 | 2022-03-11 21:00:00 |
| 592 | 2022-03-11 22:00:00 |
| 593 | 2022-03-11 23:00:00 |
| 594 | 2022-03-12 00:00:00 |
# Visual check: actual vs. predicted Open price on the hold-out window.
plt.plot(real_stock_price, color = 'black', label = ' Stock Price')
plt.plot(predicted_stock_price, color = 'green', label = 'Predicted Stock Price')
plt.title(' Stock Price Prediction')
plt.xlabel('Date')
plt.ylabel(' Stock Price')
plt.legend()
plt.show()
# Hold-out error metrics for the LSTM forecast.
mse = mean_squared_error(real_stock_price, predicted_stock_price)
print('MSE: '+str(mse))
mae = mean_absolute_error(real_stock_price, predicted_stock_price)
print('MAE: '+str(mae))
rmse = math.sqrt(mean_squared_error(real_stock_price, predicted_stock_price))
print('RMSE: '+str(rmse))
# MAPE as a fraction (0.0316 ≈ 3.2% average relative error).
mape = np.mean(np.abs(predicted_stock_price - real_stock_price)/np.abs(real_stock_price))
print('MAPE: '+str(mape))
MSE: 424.22564146136494 MAE: 16.173544601106293 RMSE: 20.596738612250363 MAPE: 0.031609390451010216
# Forward-fill any missing closes, then shape as an (n, 1) column vector
# for the scaler. (.ffill() replaces the deprecated fillna(method='ffill').)
y = df['Close'].ffill()
y = y.values.reshape(-1, 1)
len(y)
684
# Scale the close prices into [0, 1].
scaler = MinMaxScaler(feature_range=(0, 1))
scaler = scaler.fit(y)
y = scaler.transform(y)
# Generate the input and output sequences.
n_lookback =50 # length of input sequences (lookback period)
n_forecast =50 # length of output sequences (forecast period)
# Direct multi-step setup: each sample maps 50 past closes to the 50 next.
X = []
Y = []
for i in range(n_lookback, len(y) - n_forecast + 1):
    X.append(y[i - n_lookback: i])
    Y.append(y[i: i + n_forecast])
X = np.array(X)
Y = np.array(Y)
# NOTE(review): this rebinds `model`, which previously held the Prophet
# instance — any later Prophet use would silently hit this LSTM instead.
model = Sequential()
model.add(LSTM(units=50, return_sequences=True, input_shape=(n_lookback, 1)))
model.add(LSTM(units=50))
# 50-unit head: the whole forecast horizon predicted in one shot.
model.add(Dense(n_forecast))
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(X, Y, epochs=100, batch_size=32, verbose=0)
<keras.callbacks.History at 0x23193b30100>
# Generate the forecasts.
X_ = y[- n_lookback:] # last available input sequence
X_ = X_.reshape(1, n_lookback, 1)
# Predict the next 50 steps and map them back to price scale.
Y_ = model.predict(X_).reshape(-1, 1)
Y_ = scaler.inverse_transform(Y_)
1/1 [==============================] - 1s 660ms/step
# Organize actuals and forecast into one frame.
# BUG FIX: df's index is a RangeIndex here (Date became a *column* after the
# earlier reset_index), so the original df[['Close']].reset_index() produced
# integer "dates" that to_datetime turned into 1970-01-01 epoch values (see
# the df_past output). Use the Date column directly instead.
df_past = df[['Date', 'Close']].rename(columns={'Close': 'Actual'})
df_past['Date'] = pd.to_datetime(df_past['Date'])
df_past['Forecast'] = np.nan
# Seed the forecast line at the last actual so the plot joins up.
df_past['Forecast'].iloc[-1] = df_past['Actual'].iloc[-1]
df_future = pd.DataFrame(columns=['Date', 'Actual', 'Forecast'])
df_future['Date'] = pd.date_range(start=df_past['Date'].iloc[-1] + pd.Timedelta(days=1), periods=n_forecast)
df_future['Forecast'] = Y_.flatten()
df_future['Actual'] = np.nan
# pd.concat replaces the deprecated DataFrame.append.
results = pd.concat([df_past, df_future], ignore_index=True).set_index('Date')
#plot the results
results.plot(title='prediction')
<AxesSubplot:title={'center':'prediction'}, xlabel='Date'>
results.reset_index(inplace=True)
df_past
| Date | Actual | Forecast | |
|---|---|---|---|
| 0 | 1970-01-01 00:00:00.000000000 | 334.45 | NaN |
| 1 | 1970-01-01 00:00:00.000000001 | 339.30 | NaN |
| 2 | 1970-01-01 00:00:00.000000002 | 333.70 | NaN |
| 3 | 1970-01-01 00:00:00.000000003 | 319.00 | NaN |
| 4 | 1970-01-01 00:00:00.000000004 | 318.40 | NaN |
| ... | ... | ... | ... |
| 679 | 1970-01-01 00:00:00.000000679 | 567.30 | NaN |
| 680 | 1970-01-01 00:00:00.000000680 | 550.60 | NaN |
| 681 | 1970-01-01 00:00:00.000000681 | 543.30 | NaN |
| 682 | 1970-01-01 00:00:00.000000682 | 536.50 | NaN |
| 683 | 1970-01-01 00:00:00.000000683 | 524.85 | 524.85 |
684 rows × 3 columns
# NOTE(review): test_forecast is computed here but never used below.
test_forecast = results.loc[results['Date']> '2022-9-23']
# Overlay actuals and forecast on a shared Date axis.
pd.concat([df_past.set_index('Date'),results.set_index('Date')],axis=1).plot(figsize=(10,5))
<AxesSubplot:xlabel='Date'>
pd.concat([df_past,results],axis=1).plot(figsize=(15,5))
<AxesSubplot:>
import seaborn as sns
# Visual null check — should be a uniform block since NaNs were dropped earlier.
sns.heatmap(df.isnull(), cbar=False)
<AxesSubplot:>
import os
import warnings
warnings.filterwarnings('ignore')
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
plt.style.use('fivethirtyeight')
from pylab import rcParams
rcParams['figure.figsize'] = 10, 6
from statsmodels.tsa.stattools import adfuller
from statsmodels.tsa.seasonal import seasonal_decompose
from statsmodels.tsa.arima_model import ARIMA
from pmdarima.arima import auto_arima
from sklearn.metrics import mean_squared_error, mean_absolute_error
import math
import numpy as np
from nsepy import get_history
from datetime import date
def test_stationarity(timeseries):
    """Plot rolling mean/std of *timeseries* and print the ADF test results."""
    # Rolling statistics over a 12-observation window.
    window = 12
    rolling_mean = timeseries.rolling(window).mean()
    rolling_std = timeseries.rolling(window).std()

    plt.plot(timeseries, color='blue', label='Original')
    plt.plot(rolling_mean, color='red', label='Rolling Mean')
    plt.plot(rolling_std, color='black', label='Rolling Std')
    plt.legend(loc='best')
    plt.title('Rolling Mean and Standard Deviation')
    plt.show(block=False)

    print("Results of dickey fuller test")
    adft = adfuller(timeseries, autolag='AIC')
    # adfuller returns a plain tuple; label the entries for readability.
    output = pd.Series(
        adft[0:4],
        index=['Test Statistics', 'p-value', 'No. of lags used',
               'Number of observations used'],
    )
    for key, value in adft[4].items():
        output['critical value (%s)' % key] = value
    print(output)
test_stationarity(df.Close)
Results of dickey fuller test Test Statistics -0.717473 p-value 0.842172 No. of lags used 5.000000 Number of observations used 678.000000 critical value (1%) -3.440032 critical value (5%) -2.865812 critical value (10%) -2.569045 dtype: float64
import matplotlib.pyplot as plt
import statsmodels.api as sm
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
df.head()
| Date | Open | High | Low | Close | Volume | |
|---|---|---|---|---|---|---|
| 0 | 2020-01-01 | 334.70 | 335.95 | 332.15 | 334.45 | 17379320 |
| 1 | 2020-01-02 | 334.50 | 339.85 | 333.35 | 339.30 | 20324236 |
| 2 | 2020-01-03 | 337.95 | 337.95 | 332.00 | 333.70 | 21853208 |
| 3 | 2020-01-06 | 331.70 | 331.70 | 317.70 | 319.00 | 35645325 |
| 4 | 2020-01-07 | 324.45 | 327.00 | 315.40 | 318.40 | 50966826 |
# ACF (50 lags) and PACF (20 lags) side by side to eyeball AR/MA orders.
fig, (ax1, ax2) = plt.subplots(1, 2,figsize=(16,6), dpi= 80)
plot_acf(df.Close.tolist(), ax=ax1, lags=50)
plot_pacf(df.Close.tolist(), ax=ax2, lags=20)
# Multiplicative trend/seasonal/residual decomposition.
# `freq` was renamed to `period` and removed in statsmodels 0.13 (the
# version installed per the pip log); 30 ≈ one trading month.
result = seasonal_decompose(df.Close, model='multiplicative', period=30)
fig = plt.figure()
fig = result.plot()
fig.set_size_inches(16, 9)
<Figure size 1000x600 with 0 Axes>
from pylab import rcParams
rcParams['figure.figsize'] = 10, 6
# Log-transform to stabilise variance before the stationarity analysis.
df_log = np.log(df.Close)
moving_avg = df_log.rolling(12).mean()
std_dev = df_log.rolling(12).std()
plt.title('Moving Average')
plt.plot(std_dev, color ="black", label = "Standard Deviation")
plt.plot(moving_avg, color="red", label = "Mean")
# BUG FIX: the original called plt.legend(loc='best') *before* plotting any
# labelled artist, producing the "No handles with labels found" warning seen
# in the output. A single legend() after the plots is sufficient.
plt.legend()
plt.show()
No handles with labels found to put in legend.
# Split data into train and test sets (chronological 90/10 on the log series).
# NOTE(review): the [3:...] slice skips the first 3 observations — confirm
# that is intentional.
train_data, test_data = df_log[3:int(len(df_log)*0.9)], df_log[int(len(df_log)*0.9):]
plt.figure(figsize=(10,6))
plt.grid(True)
plt.xlabel('Dates')
plt.ylabel('Closing Prices')
# BUG FIX: the original plotted the *whole* df_log series labelled
# 'Train data'; plot only the training slice.
plt.plot(train_data, 'green', label='Train data')
plt.plot(test_data, 'blue', label='Test data')
plt.legend()
<matplotlib.legend.Legend at 0x2319a929940>
# Stepwise search for the best non-seasonal (p, d, q) on the training series.
model_autoARIMA = auto_arima(train_data, start_p=0, start_q=0,
                             test='adf',       # use adftest to find optimal 'd'
                             max_p=3, max_q=3, # maximum p and q
                             m=1,              # frequency of series
                             d=None,           # let model determine 'd'
                             seasonal=False,   # No Seasonality
                             start_P=0,
                             D=0,
                             trace=True,
                             error_action='ignore',
                             suppress_warnings=True,
                             stepwise=True)
print(model_autoARIMA.summary())
Performing stepwise search to minimize aic
ARIMA(0,1,0)(0,0,0)[0] intercept : AIC=-2771.058, Time=0.12 sec
ARIMA(1,1,0)(0,0,0)[0] intercept : AIC=-2769.636, Time=0.19 sec
ARIMA(0,1,1)(0,0,0)[0] intercept : AIC=-2769.592, Time=0.18 sec
ARIMA(0,1,0)(0,0,0)[0] : AIC=-2772.806, Time=0.04 sec
ARIMA(1,1,1)(0,0,0)[0] intercept : AIC=-2770.162, Time=0.68 sec
Best model: ARIMA(0,1,0)(0,0,0)[0]
Total fit time: 1.222 seconds
SARIMAX Results
==============================================================================
Dep. Variable: y No. Observations: 612
Model: SARIMAX(0, 1, 0) Log Likelihood 1387.403
Date: Thu, 29 Sep 2022 AIC -2772.806
Time: 00:15:23 BIC -2768.391
Sample: 0 HQIC -2771.089
- 612
Covariance Type: opg
==============================================================================
coef std err z P>|z| [0.025 0.975]
------------------------------------------------------------------------------
sigma2 0.0006 1.9e-05 32.866 0.000 0.001 0.001
===================================================================================
Ljung-Box (L1) (Q): 0.58 Jarque-Bera (JB): 680.19
Prob(Q): 0.45 Prob(JB): 0.00
Heteroskedasticity (H): 0.33 Skew: -0.41
Prob(H) (two-sided): 0.00 Kurtosis: 8.10
===================================================================================
Warnings:
[1] Covariance matrix calculated using the outer product of gradients (complex-step).
model_autoARIMA.plot_diagnostics(figsize=(15,8))
plt.show()
# Fit ARIMA(0,1,0) — the order auto_arima selected above.
# NOTE(review): statsmodels.tsa.arima_model.ARIMA is deprecated (removed in
# newer statsmodels); migrating to statsmodels.tsa.arima.model.ARIMA would
# also change the forecast() return signature unpacked below, so both cells
# must move together.
model = ARIMA(train_data, order=(0, 1, 0))
fitted = model.fit(disp=-1)
print(fitted.summary())
ARIMA Model Results
==============================================================================
Dep. Variable: D.Close No. Observations: 611
Model: ARIMA(0, 1, 0) Log Likelihood 1387.529
Method: css S.D. of innovations 0.025
Date: Thu, 29 Sep 2022 AIC -2771.058
Time: 00:15:23 BIC -2762.228
Sample: 1 HQIC -2767.623
==============================================================================
coef std err z P>|z| [0.025 0.975]
------------------------------------------------------------------------------
const 0.0005 0.001 0.501 0.616 -0.001 0.002
==============================================================================
# Out-of-sample forecast over the full test horizon with a 95% interval.
# The legacy-statsmodels forecast API returns three arrays: point forecasts,
# their standard errors, and an (n, 2) array of confidence-interval bounds.
horizon = len(test_data)
fc, se, conf = fitted.forecast(horizon, alpha=0.05)  # alpha=0.05 -> 95% band
# Re-index everything onto the test set's dates for plotting/scoring.
idx = test_data.index
fc_series = pd.Series(fc, index=idx)
lower_series = pd.Series(conf[:, 0], index=idx)
upper_series = pd.Series(conf[:, 1], index=idx)
# Plot training history, actual test prices, and the ARIMA forecast together,
# with the 95% confidence band shaded around the forecast line.
plt.figure(figsize=(12,5), dpi=100)
plt.plot(train_data, label='training')
plt.plot(test_data, color = 'blue', label='Actual Stock Price')
plt.plot(fc_series, color = 'orange',label='Predicted Stock Price')
# Shade the region between the lower and upper confidence bounds.
plt.fill_between(lower_series.index, lower_series, upper_series,
color='k', alpha=.10)
plt.title(' Stock Price Prediction')
plt.xlabel('Time')
plt.ylabel('Actual Stock Price')
plt.legend(loc='upper left', fontsize=8)
plt.show()
# Report forecast accuracy of the ARIMA model on the test split.
mse = mean_squared_error(test_data, fc)
print('MSE: '+str(mse))
mae = mean_absolute_error(test_data, fc)
print('MAE: '+str(mae))
# RMSE is the square root of the MSE already computed above — reuse it
# instead of calling mean_squared_error a second time (same value, less work).
rmse = math.sqrt(mse)
print('RMSE: '+str(rmse))
# Mean absolute percentage error, reported as a fraction (e.g. 0.024 ~ 2.4%).
mape = np.mean(np.abs(fc - test_data)/np.abs(test_data))
print('MAPE: '+str(mape))
MSE: 0.02654534780686956 MAE: 0.15256451524289494 RMSE: 0.16292743110621233 MAPE: 0.02433567466665604
!pip install darts
Collecting darts Using cached darts-0.21.0-py3-none-any.whl (424 kB) Requirement already satisfied: requests>=2.22.0 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (2.26.0) Requirement already satisfied: numpy>=1.19.0 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (1.22.4) Requirement already satisfied: holidays>=0.11.1 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (0.16) Requirement already satisfied: scipy>=1.3.2 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (1.7.1) Collecting lightgbm>=2.2.3 Using cached lightgbm-3.3.2-py3-none-win_amd64.whl (1.0 MB) Requirement already satisfied: matplotlib>=3.3.0 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (3.4.3) Requirement already satisfied: torch>=1.8.0 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (1.12.1) Collecting tbats>=1.1.0 Using cached tbats-1.1.0-py3-none-any.whl (43 kB) Collecting statsforecast==0.6.0 Using cached statsforecast-0.6.0-py3-none-any.whl (44 kB) Requirement already satisfied: scikit-learn>=1.0.1 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (1.1.1) Requirement already satisfied: ipython>=5.0.0 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (7.29.0) Requirement already satisfied: prophet>=1.1 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (1.1.1) Requirement already satisfied: joblib>=0.16.0 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (1.1.0) Requirement already satisfied: tqdm>=4.60.0 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (4.62.3) Collecting xarray>=0.17.0 Using cached xarray-2022.9.0-py3-none-any.whl (943 kB) Requirement already satisfied: pandas>=1.0.5 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (1.3.4) Requirement already satisfied: statsmodels>=0.13.0 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (0.13.2) Collecting catboost>=1.0.6 Using cached 
catboost-1.1-cp39-none-win_amd64.whl (74.0 MB) Collecting nfoursid>=1.0.0 Using cached nfoursid-1.0.1-py3-none-any.whl (16 kB) Requirement already satisfied: pmdarima>=1.8.0 in c:\users\wolverine\anaconda3\lib\site-packages (from darts) (2.0.1) Collecting pytorch-lightning>=1.5.0 Using cached pytorch_lightning-1.7.7-py3-none-any.whl (708 kB) Requirement already satisfied: numba in c:\users\wolverine\anaconda3\lib\site-packages (from statsforecast==0.6.0->darts) (0.54.1) Requirement already satisfied: plotly in c:\users\wolverine\anaconda3\lib\site-packages (from catboost>=1.0.6->darts) (5.10.0) Requirement already satisfied: six in c:\users\wolverine\anaconda3\lib\site-packages (from catboost>=1.0.6->darts) (1.16.0) Collecting graphviz Using cached graphviz-0.20.1-py3-none-any.whl (47 kB) Requirement already satisfied: convertdate>=2.3.0 in c:\users\wolverine\anaconda3\lib\site-packages (from holidays>=0.11.1->darts) (2.4.0) Requirement already satisfied: python-dateutil in c:\users\wolverine\anaconda3\lib\site-packages (from holidays>=0.11.1->darts) (2.8.2) Requirement already satisfied: hijri-converter in c:\users\wolverine\anaconda3\lib\site-packages (from holidays>=0.11.1->darts) (2.2.4) Requirement already satisfied: korean-lunar-calendar in c:\users\wolverine\anaconda3\lib\site-packages (from holidays>=0.11.1->darts) (0.2.1) Requirement already satisfied: pymeeus<=1,>=0.3.13 in c:\users\wolverine\anaconda3\lib\site-packages (from convertdate>=2.3.0->holidays>=0.11.1->darts) (0.5.11) Requirement already satisfied: prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 in c:\users\wolverine\anaconda3\lib\site-packages (from ipython>=5.0.0->darts) (3.0.20) Requirement already satisfied: pygments in c:\users\wolverine\anaconda3\lib\site-packages (from ipython>=5.0.0->darts) (2.10.0) Requirement already satisfied: traitlets>=4.2 in c:\users\wolverine\anaconda3\lib\site-packages (from ipython>=5.0.0->darts) (5.1.0) Requirement already satisfied: colorama in 
c:\users\wolverine\anaconda3\lib\site-packages (from ipython>=5.0.0->darts) (0.4.4) Requirement already satisfied: decorator in c:\users\wolverine\anaconda3\lib\site-packages (from ipython>=5.0.0->darts) (5.1.0) Requirement already satisfied: setuptools>=18.5 in c:\users\wolverine\anaconda3\lib\site-packages (from ipython>=5.0.0->darts) (58.0.4)
WARNING: Ignoring invalid distribution -tatsmodels (c:\users\wolverine\anaconda3\lib\site-packages)
WARNING: Ignoring invalid distribution -tatsmodels (c:\users\wolverine\anaconda3\lib\site-packages)
WARNING: Ignoring invalid distribution -tatsmodels (c:\users\wolverine\anaconda3\lib\site-packages)
WARNING: Ignoring invalid distribution -tatsmodels (c:\users\wolverine\anaconda3\lib\site-packages)
ERROR: Cannot uninstall 'llvmlite'. It is a distutils installed project and thus we cannot accurately determine which files belong to it which would lead to only a partial uninstall.
WARNING: Ignoring invalid distribution -tatsmodels (c:\users\wolverine\anaconda3\lib\site-packages)
WARNING: Ignoring invalid distribution -tatsmodels (c:\users\wolverine\anaconda3\lib\site-packages)
WARNING: Ignoring invalid distribution -tatsmodels (c:\users\wolverine\anaconda3\lib\site-packages)
Requirement already satisfied: jedi>=0.16 in c:\users\wolverine\anaconda3\lib\site-packages (from ipython>=5.0.0->darts) (0.18.0)
Requirement already satisfied: backcall in c:\users\wolverine\anaconda3\lib\site-packages (from ipython>=5.0.0->darts) (0.2.0)
Requirement already satisfied: matplotlib-inline in c:\users\wolverine\anaconda3\lib\site-packages (from ipython>=5.0.0->darts) (0.1.2)
Requirement already satisfied: pickleshare in c:\users\wolverine\anaconda3\lib\site-packages (from ipython>=5.0.0->darts) (0.7.5)
Requirement already satisfied: parso<0.9.0,>=0.8.0 in c:\users\wolverine\anaconda3\lib\site-packages (from jedi>=0.16->ipython>=5.0.0->darts) (0.8.2)
Requirement already satisfied: wheel in c:\users\wolverine\anaconda3\lib\site-packages (from lightgbm>=2.2.3->darts) (0.37.0)
Requirement already satisfied: kiwisolver>=1.0.1 in c:\users\wolverine\anaconda3\lib\site-packages (from matplotlib>=3.3.0->darts) (1.3.1)
Requirement already satisfied: pyparsing>=2.2.1 in c:\users\wolverine\anaconda3\lib\site-packages (from matplotlib>=3.3.0->darts) (3.0.4)
Requirement already satisfied: cycler>=0.10 in c:\users\wolverine\anaconda3\lib\site-packages (from matplotlib>=3.3.0->darts) (0.10.0)
Requirement already satisfied: pillow>=6.2.0 in c:\users\wolverine\anaconda3\lib\site-packages (from matplotlib>=3.3.0->darts) (8.4.0)
Requirement already satisfied: pytz>=2017.3 in c:\users\wolverine\anaconda3\lib\site-packages (from pandas>=1.0.5->darts) (2021.3)
Requirement already satisfied: urllib3 in c:\users\wolverine\anaconda3\lib\site-packages (from pmdarima>=1.8.0->darts) (1.26.7)
Requirement already satisfied: Cython!=0.29.18,!=0.29.31,>=0.29 in c:\users\wolverine\anaconda3\lib\site-packages (from pmdarima>=1.8.0->darts) (0.29.28)
Requirement already satisfied: wcwidth in c:\users\wolverine\anaconda3\lib\site-packages (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->ipython>=5.0.0->darts) (0.2.5)
Requirement already satisfied: LunarCalendar>=0.0.9 in c:\users\wolverine\anaconda3\lib\site-packages (from prophet>=1.1->darts) (0.0.9)
Requirement already satisfied: setuptools-git>=1.2 in c:\users\wolverine\anaconda3\lib\site-packages (from prophet>=1.1->darts) (1.2)
Requirement already satisfied: cmdstanpy>=1.0.4 in c:\users\wolverine\anaconda3\lib\site-packages (from prophet>=1.1->darts) (1.0.7)
Requirement already satisfied: ujson in c:\users\wolverine\anaconda3\lib\site-packages (from cmdstanpy>=1.0.4->prophet>=1.1->darts) (4.0.2)
Requirement already satisfied: ephem>=3.7.5.3 in c:\users\wolverine\anaconda3\lib\site-packages (from LunarCalendar>=0.0.9->prophet>=1.1->darts) (4.1.3)
Collecting pyDeprecate>=0.3.1
Using cached pyDeprecate-0.3.2-py3-none-any.whl (10 kB)
Requirement already satisfied: tensorboard>=2.9.1 in c:\users\wolverine\anaconda3\lib\site-packages (from pytorch-lightning>=1.5.0->darts) (2.9.1)
Requirement already satisfied: PyYAML>=5.4 in c:\users\wolverine\anaconda3\lib\site-packages (from pytorch-lightning>=1.5.0->darts) (6.0)
Requirement already satisfied: fsspec[http]!=2021.06.0,>=2021.05.0 in c:\users\wolverine\anaconda3\lib\site-packages (from pytorch-lightning>=1.5.0->darts) (2021.10.1)
Requirement already satisfied: packaging>=17.0 in c:\users\wolverine\anaconda3\lib\site-packages (from pytorch-lightning>=1.5.0->darts) (21.3)
Requirement already satisfied: typing-extensions>=4.0.0 in c:\users\wolverine\anaconda3\lib\site-packages (from pytorch-lightning>=1.5.0->darts) (4.3.0)
Collecting torchmetrics>=0.7.0
Using cached torchmetrics-0.9.3-py3-none-any.whl (419 kB)
Collecting aiohttp
Using cached aiohttp-3.8.3-cp39-cp39-win_amd64.whl (323 kB)
Requirement already satisfied: idna<4,>=2.5 in c:\users\wolverine\anaconda3\lib\site-packages (from requests>=2.22.0->darts) (3.2)
Requirement already satisfied: certifi>=2017.4.17 in c:\users\wolverine\anaconda3\lib\site-packages (from requests>=2.22.0->darts) (2021.10.8)
Requirement already satisfied: charset-normalizer~=2.0.0 in c:\users\wolverine\anaconda3\lib\site-packages (from requests>=2.22.0->darts) (2.0.4)
Requirement already satisfied: threadpoolctl>=2.0.0 in c:\users\wolverine\anaconda3\lib\site-packages (from scikit-learn>=1.0.1->darts) (2.2.0)
Requirement already satisfied: patsy>=0.5.2 in c:\users\wolverine\anaconda3\lib\site-packages (from statsmodels>=0.13.0->darts) (0.5.2)
Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in c:\users\wolverine\anaconda3\lib\site-packages (from tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (1.8.1)
Requirement already satisfied: tensorboard-data-server<0.7.0,>=0.6.0 in c:\users\wolverine\anaconda3\lib\site-packages (from tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (0.6.1)
Requirement already satisfied: grpcio>=1.24.3 in c:\users\wolverine\anaconda3\lib\site-packages (from tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (1.48.0)
Requirement already satisfied: werkzeug>=1.0.1 in c:\users\wolverine\anaconda3\lib\site-packages (from tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (2.0.2)
Requirement already satisfied: absl-py>=0.4 in c:\users\wolverine\anaconda3\lib\site-packages (from tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (1.2.0)
Requirement already satisfied: protobuf<3.20,>=3.9.2 in c:\users\wolverine\anaconda3\lib\site-packages (from tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (3.19.4)
Requirement already satisfied: google-auth<3,>=1.6.3 in c:\users\wolverine\anaconda3\lib\site-packages (from tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (2.9.1)
Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in c:\users\wolverine\anaconda3\lib\site-packages (from tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (0.4.6)
Requirement already satisfied: markdown>=2.6.8 in c:\users\wolverine\anaconda3\lib\site-packages (from tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (3.4.1)
Requirement already satisfied: cachetools<6.0,>=2.0.0 in c:\users\wolverine\anaconda3\lib\site-packages (from google-auth<3,>=1.6.3->tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (5.2.0)
Requirement already satisfied: pyasn1-modules>=0.2.1 in c:\users\wolverine\anaconda3\lib\site-packages (from google-auth<3,>=1.6.3->tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (0.2.8)
Requirement already satisfied: rsa<5,>=3.1.4 in c:\users\wolverine\anaconda3\lib\site-packages (from google-auth<3,>=1.6.3->tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (4.9)
Requirement already satisfied: requests-oauthlib>=0.7.0 in c:\users\wolverine\anaconda3\lib\site-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (1.3.1)
Requirement already satisfied: importlib-metadata>=4.4 in c:\users\wolverine\anaconda3\lib\site-packages (from markdown>=2.6.8->tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (4.8.1)
Requirement already satisfied: zipp>=0.5 in c:\users\wolverine\anaconda3\lib\site-packages (from importlib-metadata>=4.4->markdown>=2.6.8->tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (3.6.0)
Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in c:\users\wolverine\anaconda3\lib\site-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (0.4.8)
Requirement already satisfied: oauthlib>=3.0.0 in c:\users\wolverine\anaconda3\lib\site-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard>=2.9.1->pytorch-lightning>=1.5.0->darts) (3.2.0)
Requirement already satisfied: aiosignal>=1.1.2 in c:\users\wolverine\anaconda3\lib\site-packages (from aiohttp->fsspec[http]!=2021.06.0,>=2021.05.0->pytorch-lightning>=1.5.0->darts) (1.2.0)
Requirement already satisfied: attrs>=17.3.0 in c:\users\wolverine\anaconda3\lib\site-packages (from aiohttp->fsspec[http]!=2021.06.0,>=2021.05.0->pytorch-lightning>=1.5.0->darts) (21.2.0)
Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in c:\users\wolverine\anaconda3\lib\site-packages (from aiohttp->fsspec[http]!=2021.06.0,>=2021.05.0->pytorch-lightning>=1.5.0->darts) (4.0.2)
Requirement already satisfied: multidict<7.0,>=4.5 in c:\users\wolverine\anaconda3\lib\site-packages (from aiohttp->fsspec[http]!=2021.06.0,>=2021.05.0->pytorch-lightning>=1.5.0->darts) (6.0.2)
Requirement already satisfied: frozenlist>=1.1.1 in c:\users\wolverine\anaconda3\lib\site-packages (from aiohttp->fsspec[http]!=2021.06.0,>=2021.05.0->pytorch-lightning>=1.5.0->darts) (1.3.1)
Requirement already satisfied: yarl<2.0,>=1.0 in c:\users\wolverine\anaconda3\lib\site-packages (from aiohttp->fsspec[http]!=2021.06.0,>=2021.05.0->pytorch-lightning>=1.5.0->darts) (1.8.1)
Requirement already satisfied: llvmlite<0.38,>=0.37.0rc1 in c:\users\wolverine\anaconda3\lib\site-packages (from numba->statsforecast==0.6.0->darts) (0.37.0)
Collecting numba
Using cached numba-0.56.2-cp39-cp39-win_amd64.whl (2.5 MB)
Collecting llvmlite<0.40,>=0.39.0dev0
Using cached llvmlite-0.39.1-cp39-cp39-win_amd64.whl (23.2 MB)
Requirement already satisfied: tenacity>=6.2.0 in c:\users\wolverine\anaconda3\lib\site-packages (from plotly->catboost>=1.0.6->darts) (8.0.1)
Installing collected packages: llvmlite, aiohttp, torchmetrics, pyDeprecate, numba, graphviz, xarray, tbats, statsforecast, pytorch-lightning, nfoursid, lightgbm, catboost, darts
Attempting uninstall: llvmlite
Found existing installation: llvmlite 0.37.0
# darts: forecasting library — TimeSeries container plus the N-BEATS
# deep-learning forecaster used below.
from darts import TimeSeries
from darts.models import NBEATSModel
import warnings
# Silence library warnings to keep notebook output readable.
warnings.filterwarnings("ignore")
from nsepy import get_history
from datetime import date
import pandas as pd
import numpy as np
import seaborn as sns
sns=sns.set()
import matplotlib.pyplot as plt
%matplotlib inline
import pandas_datareader as pdr
--------------------------------------------------------------------------- ModuleNotFoundError Traceback (most recent call last) C:\Users\WOLVER~1\AppData\Local\Temp/ipykernel_10092/1435460239.py in <module> ----> 1 from darts import TimeSeries 2 from darts.models import NBEATSModel 3 4 import warnings 5 warnings.filterwarnings("ignore") ModuleNotFoundError: No module named 'darts'
# Reduce the raw OHLCV frame to a weekly-mean close-price series for darts.
data = df[['Close']]
# Move the Date index back into a regular column.
df = data.reset_index()
df.head()
# Parse Date and promote it to a DatetimeIndex so resample() can work on it.
df['Date'] = pd.to_datetime(df['Date'])
df = df.set_index('Date')
df
# Downsample daily closes to weekly frequency, averaging within each week.
df = df.resample('W').mean()
df = df.reset_index()
# Selecting on required col
df = df[['Date','Close']]
df.head()
# Sanity check: count missing values (resampling can introduce NaN weeks).
df.isnull().sum()
from darts import TimeSeries
# Wrap the weekly dataframe in a darts TimeSeries; fill_missing_dates inserts
# rows for any absent weekly timestamps so the series is regularly spaced.
stock_data=TimeSeries.from_dataframe(df,time_col='Date',value_cols='Close',fill_missing_dates=True)
# NOTE(review): this conversion runs AFTER the TimeSeries was built, so it
# cannot affect stock_data; Date should already be datetime from the earlier
# resample — likely redundant, confirm and consider removing.
df['Date'] = pd.to_datetime(df['Date'])
from darts.models import NBEATSModel
# N-BEATS forecaster: looks back 30 weekly points to predict the next 10.
nbeats_model = NBEATSModel(input_chunk_length=30, output_chunk_length=10)
nbeats_model.fit(stock_data,
epochs=100,
verbose=True)
# Downgrade matplotlib to 3.1.3.
# NOTE(review): the reason for pinning this version is not shown here —
# presumably a plotting incompatibility after the darts install; confirm.
!pip uninstall matplotlib
!pip install matplotlib==3.1.3
# Forecast 30 steps (weeks) beyond the end of the training series.
pred = nbeats_model.predict(n=30,series=stock_data)
pred.plot()
# Overlay the historical series and the forecast on one chart.
stock_data.plot()
pred.plot()